Tweedie link function tests

The data are real, but their true meaning has been obfuscated.

library(targets)
library(brms)
library(patchwork)
library(tidyverse)
source("https://raw.githubusercontent.com/open-AIMS/stats_helpers/main/R/dharma.R")
source("R/tweedie_functions.R")

# Fitted-model target names: three link configurations for each of the two
# datasets ("cmn" and "rare"). Order must line up with `links` below.
target_names <- c(
  "rd_cmn_idnt", "rd_cmn_logmu", "rd_cmn_logmuphi",
  "rd_rare_idnt", "rd_rare_logmu", "rd_rare_logmuphi"
)
# Human-readable link labels, one per target (the same three repeated for
# each dataset). NOTE(review): labels are descriptive only -- the printed
# summaries below report phi = identity even for the "logmuphi" models;
# confirm the labels match the links actually used when fitting.
links <- rep(
  c("I(mu); I(phi); I(p)", "log(mu); I(phi); I(p)", "log(mu); log(phi); I(p)"),
  times = 2
)
# Compose a 2x2 diagnostic panel for a DHARMa residual object:
# QQ plot, residuals vs `form`, dispersion histogram, zero-inflation histogram.
# `plot_title` becomes the patchwork annotation title.
my_dharma <- function(dharma_res, form, plot_title) {
  resid_panel <- plot_residuals(dharma_res, form = form)
  qq_panel <- plot_qq_unif(dharma_res)
  disp_panel <- gg_dispersion_hist(
    dharma_res, cutoff_hdci = 0.99, wrap_subtitle = 55
  )
  zi_panel <- gg_zero_inflation_hist(dharma_res, wrap_subtitle = 55)
  (qq_panel + resid_panel) / (disp_panel + zi_panel) +
    plot_annotation(title = plot_title)
}
# Side-by-side posterior predictive checks for a brms model:
# density overlay (300 draws) next to an observed-vs-average scatter,
# titled via patchwork's plot_annotation.
my_ppcheck <- function(model, plot_title) {
  overlay <- brms::pp_check(model, type = "dens_overlay", ndraws = 300)
  avg_scatter <- brms::pp_check(model, type = "scatter_avg")
  overlay + avg_scatter + plot_annotation(title = plot_title)
}

Runtimes

# Collect per-model sampling runtimes into one data frame (one row per
# target). Rows are built in a list and bound once with do.call(rbind, ...)
# instead of growing the data frame with rbind() inside a loop, which copies
# the whole object on every iteration.
runtimes <- do.call(rbind, lapply(seq_along(target_names), function(i) {
  tgt <- tar_read_raw(target_names[i])
  data.frame(
    model = target_names[i],
    links = links[i],
    # tgt$t prints with time units (e.g. "6.1 mins"); abs() guards the sign.
    runtime = abs(round(tgt$t, 1))
  )
}))
runtimes
             model                   links  runtime
1      rd_cmn_idnt     I(mu); I(phi); I(p) 6.1 mins
2     rd_cmn_logmu   log(mu); I(phi); I(p) 5.1 mins
3  rd_cmn_logmuphi log(mu); log(phi); I(p) 5.5 mins
4     rd_rare_idnt     I(mu); I(phi); I(p) 2.3 mins
5    rd_rare_logmu   log(mu); I(phi); I(p) 4.9 mins
6 rd_rare_logmuphi log(mu); log(phi); I(p) 4.9 mins

Convergence

# Trace/density plots for each fitted model; the target name is printed
# first so each set of plots can be matched to its model in the output.
for (nm in target_names) {
  print(nm)
  fit <- tar_read_raw(nm)$mod
  plot(fit, N = 10)
}
[1] "rd_cmn_idnt"

[1] "rd_cmn_logmu"

[1] "rd_cmn_logmuphi"

[1] "rd_rare_idnt"

[1] "rd_rare_logmu"

[1] "rd_rare_logmuphi"

DHARMa residuals

# DHARMa residual diagnostics per model, stored by target name. The
# identity-link fits can produce negative mu draws, which makes residual
# simulation fail ("mu must be positive"); try() lets the loop continue
# and report the error for those models instead of aborting.
p <- list()
for (nm in target_names) {
  print(nm)
  tgt <- tar_read_raw(nm)
  try({
    dharma_res <- make_brms_dharma_res(tgt$mod)
    p[[nm]] <- my_dharma(dharma_res, form = tgt$mod$data$habitat, nm)
  })
}
[1] "rd_cmn_idnt"
Error in tweedie::rtweedie(n = 1, mu = mu[i], power = mtheta[i], phi = phi[i]) : 
  mu must be positive.

[1] "rd_cmn_logmu"
Loading required package: DHARMa
This is DHARMa 0.4.6. For overview type '?DHARMa'. For recent changes, type news(package = 'DHARMa')
`alternative` argument not provided, using `alternative = 'two.sided'`
Warning in geom_histogram(stat = "count", binwidth = 1, fill = "black"):
Ignoring unknown parameters: `binwidth`, `bins`, and `pad`
[1] "rd_cmn_logmuphi"
`alternative` argument not provided, using `alternative = 'two.sided'`
Warning in geom_histogram(stat = "count", binwidth = 1, fill = "black"):
Ignoring unknown parameters: `binwidth`, `bins`, and `pad`
[1] "rd_rare_idnt"
Error in tweedie::rtweedie(n = 1, mu = mu[i], power = mtheta[i], phi = phi[i]) : 
  mu must be positive.

[1] "rd_rare_logmu"
`alternative` argument not provided, using `alternative = 'two.sided'`
Warning in geom_histogram(stat = "count", binwidth = 1, fill = "black"):
Ignoring unknown parameters: `binwidth`, `bins`, and `pad`
[1] "rd_rare_logmuphi"
`alternative` argument not provided, using `alternative = 'two.sided'`
Warning in geom_histogram(stat = "count", binwidth = 1, fill = "black"):
Ignoring unknown parameters: `binwidth`, `bins`, and `pad`
p
$rd_cmn_logmu


$rd_cmn_logmuphi


$rd_rare_logmu


$rd_rare_logmuphi

PP checks

# Posterior predictive checks for every model, stored by target name.
p <- list()
for (nm in target_names) {
  fit <- tar_read_raw(nm)$mod
  p[[nm]] <- my_ppcheck(fit, nm)
}
Using all posterior draws for ppc type 'scatter_avg' by default.
Using all posterior draws for ppc type 'scatter_avg' by default.
Using all posterior draws for ppc type 'scatter_avg' by default.
Using all posterior draws for ppc type 'scatter_avg' by default.
Using all posterior draws for ppc type 'scatter_avg' by default.
Using all posterior draws for ppc type 'scatter_avg' by default.
p
$rd_cmn_idnt


$rd_cmn_logmu


$rd_cmn_logmuphi


$rd_rare_idnt


$rd_rare_logmu


$rd_rare_logmuphi

Model summary

# Model summaries, keyed by target name. The name is printed before each
# summary() call so sampler warnings emitted during summarisation can be
# matched to their model in the output.
s <- list()
for (nm in target_names) {
  print(nm)
  s[[nm]] <- summary(tar_read_raw(nm)$mod)
}
[1] "rd_cmn_idnt"
Warning: There were 544 divergent transitions after warmup. Increasing
adapt_delta above 0.8 may help. See
http://mc-stan.org/misc/warnings.html#divergent-transitions-after-warmup
[1] "rd_cmn_logmu"
Warning: There were 84 divergent transitions after warmup. Increasing
adapt_delta above 0.8 may help. See
http://mc-stan.org/misc/warnings.html#divergent-transitions-after-warmup
[1] "rd_cmn_logmuphi"
Warning: There were 60 divergent transitions after warmup. Increasing
adapt_delta above 0.8 may help. See
http://mc-stan.org/misc/warnings.html#divergent-transitions-after-warmup
[1] "rd_rare_idnt"
Warning: Parts of the model have not converged (some Rhats are > 1.05). Be
careful when analysing the results! We recommend running more iterations and/or
setting stronger priors.
Warning: There were 3962 divergent transitions after warmup. Increasing
adapt_delta above 0.8 may help. See
http://mc-stan.org/misc/warnings.html#divergent-transitions-after-warmup
[1] "rd_rare_logmu"
Warning: There were 52 divergent transitions after warmup. Increasing
adapt_delta above 0.8 may help. See
http://mc-stan.org/misc/warnings.html#divergent-transitions-after-warmup
[1] "rd_rare_logmuphi"
Warning: There were 34 divergent transitions after warmup. Increasing
adapt_delta above 0.8 may help. See
http://mc-stan.org/misc/warnings.html#divergent-transitions-after-warmup
s
$rd_cmn_idnt
 Family: tweedie 
  Links: mu = identity; phi = identity; mtheta = identity 
Formula: biomass ~ habitat + (1 | site/subsite) + (1 | year) 
   Data: data (Number of observations: 103) 
  Draws: 4 chains, each with iter = 2000; warmup = 1000; thin = 1;
         total post-warmup draws = 4000

Group-Level Effects: 
~site (Number of levels: 4) 
              Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
sd(Intercept)     1.36      1.05     0.07     4.07 1.00     1105     1332

~site:subsite (Number of levels: 20) 
              Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
sd(Intercept)     0.62      0.48     0.02     1.77 1.00     1286     1632

~year (Number of levels: 3) 
              Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
sd(Intercept)     1.85      1.32     0.20     5.16 1.00     1198     1108

Population-Level Effects: 
            Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
Intercept       5.31      1.54     1.91     8.39 1.00     1078     1053
habitatReef    -1.11      0.93    -3.02     0.63 1.00     2560     1974

Family Specific Parameters: 
       Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
phi        1.34      0.17     1.05     1.70 1.00     2568     2547
mtheta     1.70      0.07     1.56     1.82 1.00     2912     2412

Draws were sampled using sampling(NUTS). For each parameter, Bulk_ESS
and Tail_ESS are effective sample size measures, and Rhat is the potential
scale reduction factor on split chains (at convergence, Rhat = 1).

$rd_cmn_logmu
 Family: tweedie 
  Links: mu = log; phi = identity; mtheta = identity 
Formula: biomass ~ habitat + (1 | site/subsite) + (1 | year) 
   Data: data (Number of observations: 103) 
  Draws: 4 chains, each with iter = 2000; warmup = 1000; thin = 1;
         total post-warmup draws = 4000

Group-Level Effects: 
~site (Number of levels: 4) 
              Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
sd(Intercept)     0.40      0.38     0.02     1.41 1.00     1137     1473

~site:subsite (Number of levels: 20) 
              Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
sd(Intercept)     0.16      0.12     0.01     0.45 1.00     1007      901

~year (Number of levels: 3) 
              Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
sd(Intercept)     0.71      0.77     0.07     3.56 1.02      246       77

Population-Level Effects: 
            Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
Intercept       1.61      0.55    -0.01     2.64 1.03      259       75
habitatReef    -0.27      0.20    -0.64     0.15 1.02      363       79

Family Specific Parameters: 
       Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
phi        1.33      0.17     1.04     1.70 1.01      937     1488
mtheta     1.70      0.07     1.55     1.81 1.00     2160     2158

Draws were sampled using sampling(NUTS). For each parameter, Bulk_ESS
and Tail_ESS are effective sample size measures, and Rhat is the potential
scale reduction factor on split chains (at convergence, Rhat = 1).

$rd_cmn_logmuphi
 Family: tweedie 
  Links: mu = log; phi = identity; mtheta = identity 
Formula: biomass ~ habitat + (1 | site/subsite) + (1 | year) 
   Data: data (Number of observations: 103) 
  Draws: 4 chains, each with iter = 2000; warmup = 1000; thin = 1;
         total post-warmup draws = 4000

Group-Level Effects: 
~site (Number of levels: 4) 
              Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
sd(Intercept)     0.42      0.38     0.02     1.50 1.00      799      687

~site:subsite (Number of levels: 20) 
              Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
sd(Intercept)     0.17      0.12     0.01     0.45 1.00     1350     1590

~year (Number of levels: 3) 
              Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
sd(Intercept)     0.74      0.68     0.08     2.71 1.01      404      206

Population-Level Effects: 
            Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
Intercept       1.64      0.56     0.31     2.76 1.01      478      173
habitatReef    -0.28      0.18    -0.64     0.08 1.00     1881     2401

Family Specific Parameters: 
       Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
phi        1.34      0.17     1.04     1.72 1.00     3788     2824
mtheta     1.69      0.07     1.54     1.81 1.00     1042      301

Draws were sampled using sampling(NUTS). For each parameter, Bulk_ESS
and Tail_ESS are effective sample size measures, and Rhat is the potential
scale reduction factor on split chains (at convergence, Rhat = 1).

$rd_rare_idnt
 Family: tweedie 
  Links: mu = identity; phi = identity; mtheta = identity 
Formula: biomass ~ habitat + (1 | site/subsite) + (1 | year) 
   Data: data (Number of observations: 103) 
  Draws: 4 chains, each with iter = 2000; warmup = 1000; thin = 1;
         total post-warmup draws = 4000

Group-Level Effects: 
~site (Number of levels: 4) 
              Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
sd(Intercept)     0.36      0.41     0.01     1.44 1.56        7       53

~site:subsite (Number of levels: 20) 
              Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
sd(Intercept)     0.10      0.11     0.00     0.39 1.38        9       17

~year (Number of levels: 3) 
              Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
sd(Intercept)     0.75      0.60     0.06     2.29 1.30       11       26

Population-Level Effects: 
            Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
Intercept       0.70      0.40    -0.30     1.47 1.34        9       14
habitatReef     0.44      0.29    -0.07     1.08 1.16       85       47

Family Specific Parameters: 
       Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
phi        2.25      0.33     1.69     3.01 1.06       49      166
mtheta     1.39      0.05     1.29     1.50 1.09       81      175

Draws were sampled using sampling(NUTS). For each parameter, Bulk_ESS
and Tail_ESS are effective sample size measures, and Rhat is the potential
scale reduction factor on split chains (at convergence, Rhat = 1).

$rd_rare_logmu
 Family: tweedie 
  Links: mu = log; phi = identity; mtheta = identity 
Formula: biomass ~ habitat + (1 | site/subsite) + (1 | year) 
   Data: data (Number of observations: 103) 
  Draws: 4 chains, each with iter = 2000; warmup = 1000; thin = 1;
         total post-warmup draws = 4000

Group-Level Effects: 
~site (Number of levels: 4) 
              Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
sd(Intercept)     0.62      0.54     0.03     2.00 1.00     1163     1315

~site:subsite (Number of levels: 20) 
              Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
sd(Intercept)     0.35      0.22     0.02     0.83 1.00     1244     1697

~year (Number of levels: 3) 
              Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
sd(Intercept)     3.34      1.77     1.04     7.94 1.00     2107     1801

Population-Level Effects: 
            Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
Intercept      -1.84      1.59    -5.32     1.12 1.00     1610     1712
habitatReef     0.44      0.30    -0.14     1.02 1.00     5229     2952

Family Specific Parameters: 
       Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
phi        1.93      0.30     1.42     2.60 1.00     2884     2806
mtheta     1.36      0.06     1.25     1.49 1.00     3039     2758

Draws were sampled using sampling(NUTS). For each parameter, Bulk_ESS
and Tail_ESS are effective sample size measures, and Rhat is the potential
scale reduction factor on split chains (at convergence, Rhat = 1).

$rd_rare_logmuphi
 Family: tweedie 
  Links: mu = log; phi = identity; mtheta = identity 
Formula: biomass ~ habitat + (1 | site/subsite) + (1 | year) 
   Data: data (Number of observations: 103) 
  Draws: 4 chains, each with iter = 2000; warmup = 1000; thin = 1;
         total post-warmup draws = 4000

Group-Level Effects: 
~site (Number of levels: 4) 
              Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
sd(Intercept)     0.60      0.55     0.02     2.06 1.00     1169     1409

~site:subsite (Number of levels: 20) 
              Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
sd(Intercept)     0.35      0.22     0.02     0.82 1.00     1294     1594

~year (Number of levels: 3) 
              Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
sd(Intercept)     3.33      1.80     1.00     8.08 1.00     1920     2182

Population-Level Effects: 
            Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
Intercept      -1.75      1.65    -5.34     1.42 1.00     1585     1782
habitatReef     0.44      0.29    -0.14     0.99 1.00     5293     2478

Family Specific Parameters: 
       Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
phi        1.93      0.31     1.45     2.63 1.00     2240     2165
mtheta     1.36      0.06     1.25     1.49 1.00     2421     2218

Draws were sampled using sampling(NUTS). For each parameter, Bulk_ESS
and Tail_ESS are effective sample size measures, and Rhat is the potential
scale reduction factor on split chains (at convergence, Rhat = 1).

Stancode

# Stan code generated by brms for each fitted model, keyed by target name.
sc <- list()
for (nm in target_names) {
  sc[[nm]] <- stancode(tar_read_raw(nm)$mod)
}
sc
$rd_cmn_idnt
// generated with brms 2.20.4
functions {
  
  int num_non_zero_fun(vector y) {
    int A = 0;
    int N = num_elements(y);
    for (n in 1 : N) {
      if (y[n] != 0) {
        A += 1;
      }
    }
    return A;
  }
  
  array[] int non_zero_index_fun(vector y, int A) {
    int N = num_elements(y);
    array[A] int non_zero_index;
    int counter = 0;
    for (n in 1 : N) {
      if (y[n] != 0) {
        counter += 1;
        non_zero_index[counter] = n;
      }
    }
    return non_zero_index;
  }
  
  array[] int zero_index_fun(vector y, int Z) {
    int N = num_elements(y);
    array[Z] int zero_index;
    int counter = 0;
    for (n in 1 : N) {
      if (y[n] == 0) {
        counter += 1;
        zero_index[counter] = n;
      }
    }
    return zero_index;
  }
  
  void check_tweedie(real mu, real phi, real mtheta) {
    if (mu < 0) {
      reject("mu must be >= 0; found mu =", mu);
    }
    if (phi < 0) {
      reject("phi must be >= 0; found phi =", phi);
    }
    if (mtheta < 1 || mtheta > 2) {
      reject("mtheta must be in [1, 2]; found mtheta =", mtheta);
    }
  }
  
  void check_tweedie(vector mu, real phi, real mtheta) {
    int N = num_elements(mu);
    if (phi < 0) {
      reject("phi must be >= 0; found phi =", phi);
    }
    if (mtheta < 1 || mtheta > 2) {
      reject("mtheta must be in [1, 2]; found mtheta =", mtheta);
    }
    for (n in 1 : N) {
      if (mu[n] < 0) {
        reject("mu must be >= 0; found mu =", mu[n], "on element", n);
      }
    }
  }
  
  real tweedie_lpdf(vector y, vector mu, real phi, real mtheta, int M) {
    check_tweedie(mu, phi, mtheta);
    int N = num_elements(y);
    int N_non_zero = num_non_zero_fun(y);
    int N_zero = N - N_non_zero;
    array[N_zero] int zero_index = zero_index_fun(y, N_zero);
    array[N_non_zero] int non_zero_index = non_zero_index_fun(y, N_non_zero);
    int A = num_elements(non_zero_index);
    int NmA = N - A;
    vector[N] lambda = 1 / phi * mu ^ (2 - mtheta) / (2 - mtheta);
    real alpha = (2 - mtheta) / (mtheta - 1);
    vector[N] beta = 1 / phi * mu ^ (1 - mtheta) / (mtheta - 1);
    real lp = -sum(lambda[zero_index]);
    for (n in 1 : A) {
      vector[M] ps;
      for (m in 1 : M) {
        ps[m] = poisson_lpmf(m | lambda[n]) + gamma_lpdf(y[non_zero_index[n]] | m * alpha, beta[n]);
      }
      lp += log_sum_exp(ps);
    }
    return lp;
  }
  
  real tweedie_rng(real mu, real phi, real mtheta) {
    check_tweedie(mu, phi, mtheta);
    real lambda = 1 / phi * mu ^ (2 - mtheta) / (2 - mtheta);
    real alpha = (2 - mtheta) / (mtheta - 1);
    real beta = 1 / phi * mu ^ (1 - mtheta) / (mtheta - 1);
    int N = poisson_rng(lambda);
    real tweedie_val;
    if (mtheta == 1) {
      return phi * poisson_rng(mu / phi);
    }
    if (mtheta == 2) {
      return gamma_rng(1 / phi, beta);
    }
    if (N * alpha == 0) {
      return 0.;
    }
    return gamma_rng(N * alpha, beta);
  }

}
data {
  int<lower=1> N;  // total number of observations
  vector[N] Y;  // response variable
  int<lower=1> K;  // number of population-level effects
  matrix[N, K] X;  // population-level design matrix
  int<lower=1> Kc;  // number of population-level effects after centering
  // data for group-level effects of ID 1
  int<lower=1> N_1;  // number of grouping levels
  int<lower=1> M_1;  // number of coefficients per level
  array[N] int<lower=1> J_1;  // grouping indicator per observation
  // group-level predictor values
  vector[N] Z_1_1;
  // data for group-level effects of ID 2
  int<lower=1> N_2;  // number of grouping levels
  int<lower=1> M_2;  // number of coefficients per level
  array[N] int<lower=1> J_2;  // grouping indicator per observation
  // group-level predictor values
  vector[N] Z_2_1;
  // data for group-level effects of ID 3
  int<lower=1> N_3;  // number of grouping levels
  int<lower=1> M_3;  // number of coefficients per level
  array[N] int<lower=1> J_3;  // grouping indicator per observation
  // group-level predictor values
  vector[N] Z_3_1;
  int prior_only;  // should the likelihood be ignored?
  int<lower=1> M;
}
transformed data {
  matrix[N, Kc] Xc;  // centered version of X without an intercept
  vector[Kc] means_X;  // column means of X before centering
  for (i in 2:K) {
    means_X[i - 1] = mean(X[, i]);
    Xc[, i - 1] = X[, i] - means_X[i - 1];
  }
}
parameters {
  vector[Kc] b;  // regression coefficients
  real Intercept;  // temporary intercept for centered predictors
  real<lower=0> phi;  // precision parameter
  real<lower=1,upper=2> mtheta;
  vector<lower=0>[M_1] sd_1;  // group-level standard deviations
  array[M_1] vector[N_1] z_1;  // standardized group-level effects
  vector<lower=0>[M_2] sd_2;  // group-level standard deviations
  array[M_2] vector[N_2] z_2;  // standardized group-level effects
  vector<lower=0>[M_3] sd_3;  // group-level standard deviations
  array[M_3] vector[N_3] z_3;  // standardized group-level effects
}
transformed parameters {
  vector[N_1] r_1_1;  // actual group-level effects
  vector[N_2] r_2_1;  // actual group-level effects
  vector[N_3] r_3_1;  // actual group-level effects
  real lprior = 0;  // prior contributions to the log posterior
  r_1_1 = (sd_1[1] * (z_1[1]));
  r_2_1 = (sd_2[1] * (z_2[1]));
  r_3_1 = (sd_3[1] * (z_3[1]));
  lprior += student_t_lpdf(Intercept | 3, 3, 3.1);
  lprior += gamma_lpdf(phi | 0.01, 0.01);
  lprior += student_t_lpdf(sd_1 | 3, 0, 3.1)
    - 1 * student_t_lccdf(0 | 3, 0, 3.1);
  lprior += student_t_lpdf(sd_2 | 3, 0, 3.1)
    - 1 * student_t_lccdf(0 | 3, 0, 3.1);
  lprior += student_t_lpdf(sd_3 | 3, 0, 3.1)
    - 1 * student_t_lccdf(0 | 3, 0, 3.1);
}
model {
  // likelihood including constants
  if (!prior_only) {
    // initialize linear predictor term
    vector[N] mu = rep_vector(0.0, N);
    mu += Intercept + Xc * b;
    for (n in 1:N) {
      // add more terms to the linear predictor
      mu[n] += r_1_1[J_1[n]] * Z_1_1[n] + r_2_1[J_2[n]] * Z_2_1[n] + r_3_1[J_3[n]] * Z_3_1[n];
    }
    target += tweedie_lpdf(Y | mu, phi, mtheta, M);
  }
  // priors including constants
  target += lprior;
  target += std_normal_lpdf(z_1[1]);
  target += std_normal_lpdf(z_2[1]);
  target += std_normal_lpdf(z_3[1]);
}
generated quantities {
  // actual population-level intercept
  real b_Intercept = Intercept - dot_product(means_X, b);
}

$rd_cmn_logmu
// generated with brms 2.20.4
functions {
  
  int num_non_zero_fun(vector y) {
    int A = 0;
    int N = num_elements(y);
    for (n in 1 : N) {
      if (y[n] != 0) {
        A += 1;
      }
    }
    return A;
  }
  
  array[] int non_zero_index_fun(vector y, int A) {
    int N = num_elements(y);
    array[A] int non_zero_index;
    int counter = 0;
    for (n in 1 : N) {
      if (y[n] != 0) {
        counter += 1;
        non_zero_index[counter] = n;
      }
    }
    return non_zero_index;
  }
  
  array[] int zero_index_fun(vector y, int Z) {
    int N = num_elements(y);
    array[Z] int zero_index;
    int counter = 0;
    for (n in 1 : N) {
      if (y[n] == 0) {
        counter += 1;
        zero_index[counter] = n;
      }
    }
    return zero_index;
  }
  
  void check_tweedie(real mu, real phi, real mtheta) {
    if (mu < 0) {
      reject("mu must be >= 0; found mu =", mu);
    }
    if (phi < 0) {
      reject("phi must be >= 0; found phi =", phi);
    }
    if (mtheta < 1 || mtheta > 2) {
      reject("mtheta must be in [1, 2]; found mtheta =", mtheta);
    }
  }
  
  void check_tweedie(vector mu, real phi, real mtheta) {
    int N = num_elements(mu);
    if (phi < 0) {
      reject("phi must be >= 0; found phi =", phi);
    }
    if (mtheta < 1 || mtheta > 2) {
      reject("mtheta must be in [1, 2]; found mtheta =", mtheta);
    }
    for (n in 1 : N) {
      if (mu[n] < 0) {
        reject("mu must be >= 0; found mu =", mu[n], "on element", n);
      }
    }
  }
  
  real tweedie_lpdf(vector y, vector mu, real phi, real mtheta, int M) {
    check_tweedie(mu, phi, mtheta);
    int N = num_elements(y);
    int N_non_zero = num_non_zero_fun(y);
    int N_zero = N - N_non_zero;
    array[N_zero] int zero_index = zero_index_fun(y, N_zero);
    array[N_non_zero] int non_zero_index = non_zero_index_fun(y, N_non_zero);
    int A = num_elements(non_zero_index);
    int NmA = N - A;
    vector[N] lambda = 1 / phi * mu ^ (2 - mtheta) / (2 - mtheta);
    real alpha = (2 - mtheta) / (mtheta - 1);
    vector[N] beta = 1 / phi * mu ^ (1 - mtheta) / (mtheta - 1);
    real lp = -sum(lambda[zero_index]);
    for (n in 1 : A) {
      vector[M] ps;
      for (m in 1 : M) {
        ps[m] = poisson_lpmf(m | lambda[n]) + gamma_lpdf(y[non_zero_index[n]] | m * alpha, beta[n]);
      }
      lp += log_sum_exp(ps);
    }
    return lp;
  }
  
  real tweedie_rng(real mu, real phi, real mtheta) {
    check_tweedie(mu, phi, mtheta);
    real lambda = 1 / phi * mu ^ (2 - mtheta) / (2 - mtheta);
    real alpha = (2 - mtheta) / (mtheta - 1);
    real beta = 1 / phi * mu ^ (1 - mtheta) / (mtheta - 1);
    int N = poisson_rng(lambda);
    real tweedie_val;
    if (mtheta == 1) {
      return phi * poisson_rng(mu / phi);
    }
    if (mtheta == 2) {
      return gamma_rng(1 / phi, beta);
    }
    if (N * alpha == 0) {
      return 0.;
    }
    return gamma_rng(N * alpha, beta);
  }

}
data {
  int<lower=1> N;  // total number of observations
  vector[N] Y;  // response variable
  int<lower=1> K;  // number of population-level effects
  matrix[N, K] X;  // population-level design matrix
  int<lower=1> Kc;  // number of population-level effects after centering
  // data for group-level effects of ID 1
  int<lower=1> N_1;  // number of grouping levels
  int<lower=1> M_1;  // number of coefficients per level
  array[N] int<lower=1> J_1;  // grouping indicator per observation
  // group-level predictor values
  vector[N] Z_1_1;
  // data for group-level effects of ID 2
  int<lower=1> N_2;  // number of grouping levels
  int<lower=1> M_2;  // number of coefficients per level
  array[N] int<lower=1> J_2;  // grouping indicator per observation
  // group-level predictor values
  vector[N] Z_2_1;
  // data for group-level effects of ID 3
  int<lower=1> N_3;  // number of grouping levels
  int<lower=1> M_3;  // number of coefficients per level
  array[N] int<lower=1> J_3;  // grouping indicator per observation
  // group-level predictor values
  vector[N] Z_3_1;
  int prior_only;  // should the likelihood be ignored?
  int<lower=1> M;
}
transformed data {
  matrix[N, Kc] Xc;  // centered version of X without an intercept
  vector[Kc] means_X;  // column means of X before centering
  for (i in 2:K) {
    means_X[i - 1] = mean(X[, i]);
    Xc[, i - 1] = X[, i] - means_X[i - 1];
  }
}
parameters {
  vector[Kc] b;  // regression coefficients
  real Intercept;  // temporary intercept for centered predictors
  real<lower=0> phi;  // precision parameter
  real<lower=1,upper=2> mtheta;
  vector<lower=0>[M_1] sd_1;  // group-level standard deviations
  array[M_1] vector[N_1] z_1;  // standardized group-level effects
  vector<lower=0>[M_2] sd_2;  // group-level standard deviations
  array[M_2] vector[N_2] z_2;  // standardized group-level effects
  vector<lower=0>[M_3] sd_3;  // group-level standard deviations
  array[M_3] vector[N_3] z_3;  // standardized group-level effects
}
transformed parameters {
  vector[N_1] r_1_1;  // actual group-level effects
  vector[N_2] r_2_1;  // actual group-level effects
  vector[N_3] r_3_1;  // actual group-level effects
  real lprior = 0;  // prior contributions to the log posterior
  r_1_1 = (sd_1[1] * (z_1[1]));
  r_2_1 = (sd_2[1] * (z_2[1]));
  r_3_1 = (sd_3[1] * (z_3[1]));
  lprior += student_t_lpdf(Intercept | 3, 1.1, 2.5);
  lprior += gamma_lpdf(phi | 0.01, 0.01);
  lprior += student_t_lpdf(sd_1 | 3, 0, 2.5)
    - 1 * student_t_lccdf(0 | 3, 0, 2.5);
  lprior += student_t_lpdf(sd_2 | 3, 0, 2.5)
    - 1 * student_t_lccdf(0 | 3, 0, 2.5);
  lprior += student_t_lpdf(sd_3 | 3, 0, 2.5)
    - 1 * student_t_lccdf(0 | 3, 0, 2.5);
}
model {
  // likelihood including constants
  if (!prior_only) {
    // initialize linear predictor term
    vector[N] mu = rep_vector(0.0, N);
    mu += Intercept + Xc * b;
    for (n in 1:N) {
      // add more terms to the linear predictor
      mu[n] += r_1_1[J_1[n]] * Z_1_1[n] + r_2_1[J_2[n]] * Z_2_1[n] + r_3_1[J_3[n]] * Z_3_1[n];
    }
    mu = exp(mu);
    target += tweedie_lpdf(Y | mu, phi, mtheta, M);
  }
  // priors including constants
  target += lprior;
  target += std_normal_lpdf(z_1[1]);
  target += std_normal_lpdf(z_2[1]);
  target += std_normal_lpdf(z_3[1]);
}
generated quantities {
  // actual population-level intercept
  real b_Intercept = Intercept - dot_product(means_X, b);
}

$rd_cmn_logmuphi
// generated with brms 2.20.4
functions {
  
  int num_non_zero_fun(vector y) {
    int A = 0;
    int N = num_elements(y);
    for (n in 1 : N) {
      if (y[n] != 0) {
        A += 1;
      }
    }
    return A;
  }
  
  array[] int non_zero_index_fun(vector y, int A) {
    int N = num_elements(y);
    array[A] int non_zero_index;
    int counter = 0;
    for (n in 1 : N) {
      if (y[n] != 0) {
        counter += 1;
        non_zero_index[counter] = n;
      }
    }
    return non_zero_index;
  }
  
  array[] int zero_index_fun(vector y, int Z) {
    int N = num_elements(y);
    array[Z] int zero_index;
    int counter = 0;
    for (n in 1 : N) {
      if (y[n] == 0) {
        counter += 1;
        zero_index[counter] = n;
      }
    }
    return zero_index;
  }
  
  void check_tweedie(real mu, real phi, real mtheta) {
    if (mu < 0) {
      reject("mu must be >= 0; found mu =", mu);
    }
    if (phi < 0) {
      reject("phi must be >= 0; found phi =", phi);
    }
    if (mtheta < 1 || mtheta > 2) {
      reject("mtheta must be in [1, 2]; found mtheta =", mtheta);
    }
  }
  
  void check_tweedie(vector mu, real phi, real mtheta) {
    int N = num_elements(mu);
    if (phi < 0) {
      reject("phi must be >= 0; found phi =", phi);
    }
    if (mtheta < 1 || mtheta > 2) {
      reject("mtheta must be in [1, 2]; found mtheta =", mtheta);
    }
    for (n in 1 : N) {
      if (mu[n] < 0) {
        reject("mu must be >= 0; found mu =", mu[n], "on element", n);
      }
    }
  }
  
  real tweedie_lpdf(vector y, vector mu, real phi, real mtheta, int M) {
    check_tweedie(mu, phi, mtheta);
    int N = num_elements(y);
    int N_non_zero = num_non_zero_fun(y);
    int N_zero = N - N_non_zero;
    array[N_zero] int zero_index = zero_index_fun(y, N_zero);
    array[N_non_zero] int non_zero_index = non_zero_index_fun(y, N_non_zero);
    int A = num_elements(non_zero_index);
    int NmA = N - A;
    vector[N] lambda = 1 / phi * mu ^ (2 - mtheta) / (2 - mtheta);
    real alpha = (2 - mtheta) / (mtheta - 1);
    vector[N] beta = 1 / phi * mu ^ (1 - mtheta) / (mtheta - 1);
    real lp = -sum(lambda[zero_index]);
    for (n in 1 : A) {
      vector[M] ps;
      for (m in 1 : M) {
        ps[m] = poisson_lpmf(m | lambda[n]) + gamma_lpdf(y[non_zero_index[n]] | m * alpha, beta[n]);
      }
      lp += log_sum_exp(ps);
    }
    return lp;
  }
  
  real tweedie_rng(real mu, real phi, real mtheta) {
    check_tweedie(mu, phi, mtheta);
    real lambda = 1 / phi * mu ^ (2 - mtheta) / (2 - mtheta);
    real alpha = (2 - mtheta) / (mtheta - 1);
    real beta = 1 / phi * mu ^ (1 - mtheta) / (mtheta - 1);
    int N = poisson_rng(lambda);
    real tweedie_val;
    if (mtheta == 1) {
      return phi * poisson_rng(mu / phi);
    }
    if (mtheta == 2) {
      return gamma_rng(1 / phi, beta);
    }
    if (N * alpha == 0) {
      return 0.;
    }
    return gamma_rng(N * alpha, beta);
  }

}
data {
  int<lower=1> N;  // total number of observations
  vector[N] Y;  // response variable
  int<lower=1> K;  // number of population-level effects
  matrix[N, K] X;  // population-level design matrix
  int<lower=1> Kc;  // number of population-level effects after centering
  // data for group-level effects of ID 1
  int<lower=1> N_1;  // number of grouping levels
  int<lower=1> M_1;  // number of coefficients per level
  array[N] int<lower=1> J_1;  // grouping indicator per observation
  // group-level predictor values
  vector[N] Z_1_1;
  // data for group-level effects of ID 2
  int<lower=1> N_2;  // number of grouping levels
  int<lower=1> M_2;  // number of coefficients per level
  array[N] int<lower=1> J_2;  // grouping indicator per observation
  // group-level predictor values
  vector[N] Z_2_1;
  // data for group-level effects of ID 3
  int<lower=1> N_3;  // number of grouping levels
  int<lower=1> M_3;  // number of coefficients per level
  array[N] int<lower=1> J_3;  // grouping indicator per observation
  // group-level predictor values
  vector[N] Z_3_1;
  int prior_only;  // should the likelihood be ignored?
  int<lower=1> M;
}
transformed data {
  matrix[N, Kc] Xc;  // centered version of X without an intercept
  vector[Kc] means_X;  // column means of X before centering
  for (i in 2:K) {
    means_X[i - 1] = mean(X[, i]);
    Xc[, i - 1] = X[, i] - means_X[i - 1];
  }
}
parameters {
  vector[Kc] b;  // regression coefficients
  real Intercept;  // temporary intercept for centered predictors
  real<lower=0> phi;  // precision parameter
  real<lower=1,upper=2> mtheta;
  vector<lower=0>[M_1] sd_1;  // group-level standard deviations
  array[M_1] vector[N_1] z_1;  // standardized group-level effects
  vector<lower=0>[M_2] sd_2;  // group-level standard deviations
  array[M_2] vector[N_2] z_2;  // standardized group-level effects
  vector<lower=0>[M_3] sd_3;  // group-level standard deviations
  array[M_3] vector[N_3] z_3;  // standardized group-level effects
}
transformed parameters {
  vector[N_1] r_1_1;  // actual group-level effects
  vector[N_2] r_2_1;  // actual group-level effects
  vector[N_3] r_3_1;  // actual group-level effects
  real lprior = 0;  // prior contributions to the log posterior
  r_1_1 = (sd_1[1] * (z_1[1]));
  r_2_1 = (sd_2[1] * (z_2[1]));
  r_3_1 = (sd_3[1] * (z_3[1]));
  lprior += student_t_lpdf(Intercept | 3, 1.1, 2.5);
  lprior += gamma_lpdf(phi | 0.01, 0.01);
  lprior += student_t_lpdf(sd_1 | 3, 0, 2.5)
    - 1 * student_t_lccdf(0 | 3, 0, 2.5);
  lprior += student_t_lpdf(sd_2 | 3, 0, 2.5)
    - 1 * student_t_lccdf(0 | 3, 0, 2.5);
  lprior += student_t_lpdf(sd_3 | 3, 0, 2.5)
    - 1 * student_t_lccdf(0 | 3, 0, 2.5);
}
model {
  // likelihood including constants
  if (!prior_only) {
    // initialize linear predictor term
    vector[N] mu = rep_vector(0.0, N);
    mu += Intercept + Xc * b;
    for (n in 1:N) {
      // add more terms to the linear predictor
      mu[n] += r_1_1[J_1[n]] * Z_1_1[n] + r_2_1[J_2[n]] * Z_2_1[n] + r_3_1[J_3[n]] * Z_3_1[n];
    }
    mu = exp(mu);
    target += tweedie_lpdf(Y | mu, phi, mtheta, M);
  }
  // priors including constants
  target += lprior;
  target += std_normal_lpdf(z_1[1]);
  target += std_normal_lpdf(z_2[1]);
  target += std_normal_lpdf(z_3[1]);
}
generated quantities {
  // actual population-level intercept
  real b_Intercept = Intercept - dot_product(means_X, b);
}

$rd_rare_idnt
// generated with brms 2.20.4
functions {
  
  // Count the strictly non-zero elements of y.
  int num_non_zero_fun(vector y) {
    int A = 0;
    int N = num_elements(y);
    for (n in 1 : N) {
      if (y[n] != 0) {
        A += 1;
      }
    }
    return A;
  }
  
  // Indices of the A non-zero elements of y, in order of appearance.
  // A must equal num_non_zero_fun(y).
  array[] int non_zero_index_fun(vector y, int A) {
    int N = num_elements(y);
    array[A] int non_zero_index;
    int counter = 0;
    for (n in 1 : N) {
      if (y[n] != 0) {
        counter += 1;
        non_zero_index[counter] = n;
      }
    }
    return non_zero_index;
  }
  
  // Indices of the Z zero elements of y, in order of appearance.
  // Z must equal num_elements(y) - num_non_zero_fun(y).
  array[] int zero_index_fun(vector y, int Z) {
    int N = num_elements(y);
    array[Z] int zero_index;
    int counter = 0;
    for (n in 1 : N) {
      if (y[n] == 0) {
        counter += 1;
        zero_index[counter] = n;
      }
    }
    return zero_index;
  }
  
  // Validate scalar Tweedie parameters; rejects (raising an error in the
  // sampler) rather than returning, so callers can assume valid inputs.
  void check_tweedie(real mu, real phi, real mtheta) {
    if (mu < 0) {
      reject("mu must be >= 0; found mu =", mu);
    }
    if (phi < 0) {
      reject("phi must be >= 0; found phi =", phi);
    }
    if (mtheta < 1 || mtheta > 2) {
      reject("mtheta must be in [1, 2]; found mtheta =", mtheta);
    }
  }
  
  // Vectorized overload of check_tweedie for a per-observation mean.
  void check_tweedie(vector mu, real phi, real mtheta) {
    int N = num_elements(mu);
    if (phi < 0) {
      reject("phi must be >= 0; found phi =", phi);
    }
    if (mtheta < 1 || mtheta > 2) {
      reject("mtheta must be in [1, 2]; found mtheta =", mtheta);
    }
    for (n in 1 : N) {
      if (mu[n] < 0) {
        reject("mu must be >= 0; found mu =", mu[n], "on element", n);
      }
    }
  }
  
  // Log density of y under Tweedie(mu, phi, mtheta), evaluated via the
  // compound Poisson-gamma representation: Y is a sum of
  // N ~ Poisson(lambda) independent gamma(alpha, beta) variates.
  // The infinite mixture over the latent count is truncated at M terms.
  real tweedie_lpdf(vector y, vector mu, real phi, real mtheta, int M) {
    check_tweedie(mu, phi, mtheta);
    int N = num_elements(y);
    int N_non_zero = num_non_zero_fun(y);
    int N_zero = N - N_non_zero;
    array[N_zero] int zero_index = zero_index_fun(y, N_zero);
    array[N_non_zero] int non_zero_index = non_zero_index_fun(y, N_non_zero);
    vector[N] lambda = 1 / phi * mu ^ (2 - mtheta) / (2 - mtheta);
    real alpha = (2 - mtheta) / (mtheta - 1);
    vector[N] beta = 1 / phi * mu ^ (1 - mtheta) / (mtheta - 1);
    // A zero observation occurs iff the latent Poisson count is zero.
    real lp = -sum(lambda[zero_index]);
    for (n in 1 : N_non_zero) {
      // BUGFIX: lambda and beta are per-observation vectors and must be
      // indexed by the observation index non_zero_index[n] (matching
      // y[non_zero_index[n]]), not by the loop counter n.
      int obs = non_zero_index[n];
      vector[M] ps;
      for (m in 1 : M) {
        ps[m] = poisson_lpmf(m | lambda[obs])
                + gamma_lpdf(y[obs] | m * alpha, beta[obs]);
      }
      lp += log_sum_exp(ps);
    }
    return lp;
  }
  
  // Draw one Tweedie(mu, phi, mtheta) variate via the compound
  // Poisson-gamma construction.
  real tweedie_rng(real mu, real phi, real mtheta) {
    check_tweedie(mu, phi, mtheta);
    // BUGFIX: handle the boundary cases before computing lambda/alpha/beta.
    // At mtheta == 1 or mtheta == 2 those expressions divide by zero, so the
    // original code errored in poisson_rng(lambda) before ever reaching
    // these branches.
    if (mtheta == 1) {
      // Poisson limit, scaled by phi (mean mu).
      return phi * poisson_rng(mu / phi);
    }
    if (mtheta == 2) {
      // Gamma limit: shape 1/phi, rate 1/(phi * mu), mean mu.
      return gamma_rng(1 / phi, 1 / (phi * mu));
    }
    real lambda = 1 / phi * mu ^ (2 - mtheta) / (2 - mtheta);
    real alpha = (2 - mtheta) / (mtheta - 1);
    real beta = 1 / phi * mu ^ (1 - mtheta) / (mtheta - 1);
    int N = poisson_rng(lambda);
    if (N == 0) {
      // No gamma summands: exact zero (the point mass at 0).
      return 0.;
    }
    return gamma_rng(N * alpha, beta);
  }

}
// Data: response, fixed-effect design matrix, and indexing structures for
// three varying-intercept grouping factors (brms-generated layout).
data {
  int<lower=1> N;  // total number of observations
  vector[N] Y;  // response variable
  int<lower=1> K;  // number of population-level effects
  matrix[N, K] X;  // population-level design matrix
  int<lower=1> Kc;  // number of population-level effects after centering
  // data for group-level effects of ID 1
  int<lower=1> N_1;  // number of grouping levels
  int<lower=1> M_1;  // number of coefficients per level
  array[N] int<lower=1> J_1;  // grouping indicator per observation
  // group-level predictor values
  vector[N] Z_1_1;
  // data for group-level effects of ID 2
  int<lower=1> N_2;  // number of grouping levels
  int<lower=1> M_2;  // number of coefficients per level
  array[N] int<lower=1> J_2;  // grouping indicator per observation
  // group-level predictor values
  vector[N] Z_2_1;
  // data for group-level effects of ID 3
  int<lower=1> N_3;  // number of grouping levels
  int<lower=1> M_3;  // number of coefficients per level
  array[N] int<lower=1> J_3;  // grouping indicator per observation
  // group-level predictor values
  vector[N] Z_3_1;
  int prior_only;  // should the likelihood be ignored?
  int<lower=1> M;  // truncation point of the latent Poisson sum in tweedie_lpdf
}
transformed data {
  matrix[N, Kc] Xc;  // centered version of X without an intercept
  vector[Kc] means_X;  // column means of X before centering
  // Drop column 1 and center the remaining predictors; presumably
  // Kc == K - 1 with column 1 the intercept (brms sets this up).
  for (i in 2:K) {
    means_X[i - 1] = mean(X[, i]);
    Xc[, i - 1] = X[, i] - means_X[i - 1];
  }
}
// Parameters: fixed effects, Tweedie dispersion/power parameters, and
// non-centered group-level effects for three grouping factors.
parameters {
  vector[Kc] b;  // regression coefficients
  real Intercept;  // temporary intercept for centered predictors
  real<lower=0> phi;  // precision parameter
  real<lower=1,upper=2> mtheta;  // Tweedie power parameter (compound Poisson-gamma for 1 < mtheta < 2)
  vector<lower=0>[M_1] sd_1;  // group-level standard deviations
  array[M_1] vector[N_1] z_1;  // standardized group-level effects
  vector<lower=0>[M_2] sd_2;  // group-level standard deviations
  array[M_2] vector[N_2] z_2;  // standardized group-level effects
  vector<lower=0>[M_3] sd_3;  // group-level standard deviations
  array[M_3] vector[N_3] z_3;  // standardized group-level effects
}
transformed parameters {
  // Non-centered parameterization: actual group-level effects are the
  // standardized effects z_* scaled by the group-level SDs sd_*.
  vector[N_1] r_1_1;  // actual group-level effects
  vector[N_2] r_2_1;  // actual group-level effects
  vector[N_3] r_3_1;  // actual group-level effects
  real lprior = 0;  // prior contributions to the log posterior
  r_1_1 = (sd_1[1] * (z_1[1]));
  r_2_1 = (sd_2[1] * (z_2[1]));
  r_3_1 = (sd_3[1] * (z_3[1]));
  // Student-t prior on the (temporary, centered) intercept; vague gamma on phi.
  // Note: mtheta has no explicit prior, so it is uniform over its
  // declared support via the parameter constraint.
  lprior += student_t_lpdf(Intercept | 3, 0.3, 2.5);
  lprior += gamma_lpdf(phi | 0.01, 0.01);
  // Half-student-t priors on the SDs; the lccdf term renormalizes
  // for the truncation at zero.
  lprior += student_t_lpdf(sd_1 | 3, 0, 2.5)
    - 1 * student_t_lccdf(0 | 3, 0, 2.5);
  lprior += student_t_lpdf(sd_2 | 3, 0, 2.5)
    - 1 * student_t_lccdf(0 | 3, 0, 2.5);
  lprior += student_t_lpdf(sd_3 | 3, 0, 2.5)
    - 1 * student_t_lccdf(0 | 3, 0, 2.5);
}
model {
  // likelihood including constants
  if (!prior_only) {
    // initialize linear predictor term
    vector[N] mu = rep_vector(0.0, N);
    mu += Intercept + Xc * b;
    for (n in 1:N) {
      // add more terms to the linear predictor
      mu[n] += r_1_1[J_1[n]] * Z_1_1[n] + r_2_1[J_2[n]] * Z_2_1[n] + r_3_1[J_3[n]] * Z_3_1[n];
    }
    // Identity link: the linear predictor is used directly as mu; draws
    // with any negative mu are rejected inside tweedie_lpdf (check_tweedie).
    target += tweedie_lpdf(Y | mu, phi, mtheta, M);
  }
  // priors including constants
  target += lprior;
  // standard-normal priors on the standardized group-level effects
  target += std_normal_lpdf(z_1[1]);
  target += std_normal_lpdf(z_2[1]);
  target += std_normal_lpdf(z_3[1]);
}
generated quantities {
  // actual population-level intercept
  // (undoes the predictor centering so the intercept is on the original scale)
  real b_Intercept = Intercept - dot_product(means_X, b);
}

$rd_rare_logmu
// generated with brms 2.20.4
functions {
  
  // Count the strictly non-zero elements of y.
  int num_non_zero_fun(vector y) {
    int A = 0;
    int N = num_elements(y);
    for (n in 1 : N) {
      if (y[n] != 0) {
        A += 1;
      }
    }
    return A;
  }
  
  // Indices of the A non-zero elements of y, in order of appearance.
  // A must equal num_non_zero_fun(y).
  array[] int non_zero_index_fun(vector y, int A) {
    int N = num_elements(y);
    array[A] int non_zero_index;
    int counter = 0;
    for (n in 1 : N) {
      if (y[n] != 0) {
        counter += 1;
        non_zero_index[counter] = n;
      }
    }
    return non_zero_index;
  }
  
  // Indices of the Z zero elements of y, in order of appearance.
  // Z must equal num_elements(y) - num_non_zero_fun(y).
  array[] int zero_index_fun(vector y, int Z) {
    int N = num_elements(y);
    array[Z] int zero_index;
    int counter = 0;
    for (n in 1 : N) {
      if (y[n] == 0) {
        counter += 1;
        zero_index[counter] = n;
      }
    }
    return zero_index;
  }
  
  // Validate scalar Tweedie parameters; rejects (raising an error in the
  // sampler) rather than returning, so callers can assume valid inputs.
  void check_tweedie(real mu, real phi, real mtheta) {
    if (mu < 0) {
      reject("mu must be >= 0; found mu =", mu);
    }
    if (phi < 0) {
      reject("phi must be >= 0; found phi =", phi);
    }
    if (mtheta < 1 || mtheta > 2) {
      reject("mtheta must be in [1, 2]; found mtheta =", mtheta);
    }
  }
  
  // Vectorized overload of check_tweedie for a per-observation mean.
  void check_tweedie(vector mu, real phi, real mtheta) {
    int N = num_elements(mu);
    if (phi < 0) {
      reject("phi must be >= 0; found phi =", phi);
    }
    if (mtheta < 1 || mtheta > 2) {
      reject("mtheta must be in [1, 2]; found mtheta =", mtheta);
    }
    for (n in 1 : N) {
      if (mu[n] < 0) {
        reject("mu must be >= 0; found mu =", mu[n], "on element", n);
      }
    }
  }
  
  // Log density of y under Tweedie(mu, phi, mtheta), evaluated via the
  // compound Poisson-gamma representation: Y is a sum of
  // N ~ Poisson(lambda) independent gamma(alpha, beta) variates.
  // The infinite mixture over the latent count is truncated at M terms.
  real tweedie_lpdf(vector y, vector mu, real phi, real mtheta, int M) {
    check_tweedie(mu, phi, mtheta);
    int N = num_elements(y);
    int N_non_zero = num_non_zero_fun(y);
    int N_zero = N - N_non_zero;
    array[N_zero] int zero_index = zero_index_fun(y, N_zero);
    array[N_non_zero] int non_zero_index = non_zero_index_fun(y, N_non_zero);
    vector[N] lambda = 1 / phi * mu ^ (2 - mtheta) / (2 - mtheta);
    real alpha = (2 - mtheta) / (mtheta - 1);
    vector[N] beta = 1 / phi * mu ^ (1 - mtheta) / (mtheta - 1);
    // A zero observation occurs iff the latent Poisson count is zero.
    real lp = -sum(lambda[zero_index]);
    for (n in 1 : N_non_zero) {
      // BUGFIX: lambda and beta are per-observation vectors and must be
      // indexed by the observation index non_zero_index[n] (matching
      // y[non_zero_index[n]]), not by the loop counter n.
      int obs = non_zero_index[n];
      vector[M] ps;
      for (m in 1 : M) {
        ps[m] = poisson_lpmf(m | lambda[obs])
                + gamma_lpdf(y[obs] | m * alpha, beta[obs]);
      }
      lp += log_sum_exp(ps);
    }
    return lp;
  }
  
  // Draw one Tweedie(mu, phi, mtheta) variate via the compound
  // Poisson-gamma construction.
  real tweedie_rng(real mu, real phi, real mtheta) {
    check_tweedie(mu, phi, mtheta);
    // BUGFIX: handle the boundary cases before computing lambda/alpha/beta.
    // At mtheta == 1 or mtheta == 2 those expressions divide by zero, so the
    // original code errored in poisson_rng(lambda) before ever reaching
    // these branches.
    if (mtheta == 1) {
      // Poisson limit, scaled by phi (mean mu).
      return phi * poisson_rng(mu / phi);
    }
    if (mtheta == 2) {
      // Gamma limit: shape 1/phi, rate 1/(phi * mu), mean mu.
      return gamma_rng(1 / phi, 1 / (phi * mu));
    }
    real lambda = 1 / phi * mu ^ (2 - mtheta) / (2 - mtheta);
    real alpha = (2 - mtheta) / (mtheta - 1);
    real beta = 1 / phi * mu ^ (1 - mtheta) / (mtheta - 1);
    int N = poisson_rng(lambda);
    if (N == 0) {
      // No gamma summands: exact zero (the point mass at 0).
      return 0.;
    }
    return gamma_rng(N * alpha, beta);
  }

}
// Data: response, fixed-effect design matrix, and indexing structures for
// three varying-intercept grouping factors (brms-generated layout).
data {
  int<lower=1> N;  // total number of observations
  vector[N] Y;  // response variable
  int<lower=1> K;  // number of population-level effects
  matrix[N, K] X;  // population-level design matrix
  int<lower=1> Kc;  // number of population-level effects after centering
  // data for group-level effects of ID 1
  int<lower=1> N_1;  // number of grouping levels
  int<lower=1> M_1;  // number of coefficients per level
  array[N] int<lower=1> J_1;  // grouping indicator per observation
  // group-level predictor values
  vector[N] Z_1_1;
  // data for group-level effects of ID 2
  int<lower=1> N_2;  // number of grouping levels
  int<lower=1> M_2;  // number of coefficients per level
  array[N] int<lower=1> J_2;  // grouping indicator per observation
  // group-level predictor values
  vector[N] Z_2_1;
  // data for group-level effects of ID 3
  int<lower=1> N_3;  // number of grouping levels
  int<lower=1> M_3;  // number of coefficients per level
  array[N] int<lower=1> J_3;  // grouping indicator per observation
  // group-level predictor values
  vector[N] Z_3_1;
  int prior_only;  // should the likelihood be ignored?
  int<lower=1> M;  // truncation point of the latent Poisson sum in tweedie_lpdf
}
transformed data {
  matrix[N, Kc] Xc;  // centered version of X without an intercept
  vector[Kc] means_X;  // column means of X before centering
  // Drop column 1 and center the remaining predictors; presumably
  // Kc == K - 1 with column 1 the intercept (brms sets this up).
  for (i in 2:K) {
    means_X[i - 1] = mean(X[, i]);
    Xc[, i - 1] = X[, i] - means_X[i - 1];
  }
}
// Parameters: fixed effects, Tweedie dispersion/power parameters, and
// non-centered group-level effects for three grouping factors.
parameters {
  vector[Kc] b;  // regression coefficients
  real Intercept;  // temporary intercept for centered predictors
  real<lower=0> phi;  // precision parameter
  real<lower=1,upper=2> mtheta;  // Tweedie power parameter (compound Poisson-gamma for 1 < mtheta < 2)
  vector<lower=0>[M_1] sd_1;  // group-level standard deviations
  array[M_1] vector[N_1] z_1;  // standardized group-level effects
  vector<lower=0>[M_2] sd_2;  // group-level standard deviations
  array[M_2] vector[N_2] z_2;  // standardized group-level effects
  vector<lower=0>[M_3] sd_3;  // group-level standard deviations
  array[M_3] vector[N_3] z_3;  // standardized group-level effects
}
transformed parameters {
  // Non-centered parameterization: actual group-level effects are the
  // standardized effects z_* scaled by the group-level SDs sd_*.
  vector[N_1] r_1_1;  // actual group-level effects
  vector[N_2] r_2_1;  // actual group-level effects
  vector[N_3] r_3_1;  // actual group-level effects
  real lprior = 0;  // prior contributions to the log posterior
  r_1_1 = (sd_1[1] * (z_1[1]));
  r_2_1 = (sd_2[1] * (z_2[1]));
  r_3_1 = (sd_3[1] * (z_3[1]));
  // Student-t prior on the (temporary, centered) intercept; vague gamma on phi.
  // Note: mtheta has no explicit prior, so it is uniform over its
  // declared support via the parameter constraint.
  lprior += student_t_lpdf(Intercept | 3, -1.3, 2.5);
  lprior += gamma_lpdf(phi | 0.01, 0.01);
  // Half-student-t priors on the SDs; the lccdf term renormalizes
  // for the truncation at zero.
  lprior += student_t_lpdf(sd_1 | 3, 0, 2.5)
    - 1 * student_t_lccdf(0 | 3, 0, 2.5);
  lprior += student_t_lpdf(sd_2 | 3, 0, 2.5)
    - 1 * student_t_lccdf(0 | 3, 0, 2.5);
  lprior += student_t_lpdf(sd_3 | 3, 0, 2.5)
    - 1 * student_t_lccdf(0 | 3, 0, 2.5);
}
model {
  // likelihood including constants
  if (!prior_only) {
    // initialize linear predictor term
    vector[N] mu = rep_vector(0.0, N);
    mu += Intercept + Xc * b;
    for (n in 1:N) {
      // add more terms to the linear predictor
      mu[n] += r_1_1[J_1[n]] * Z_1_1[n] + r_2_1[J_2[n]] * Z_2_1[n] + r_3_1[J_3[n]] * Z_3_1[n];
    }
    // log link on mu: map the linear predictor onto the positive mean scale
    mu = exp(mu);
    target += tweedie_lpdf(Y | mu, phi, mtheta, M);
  }
  // priors including constants
  target += lprior;
  // standard-normal priors on the standardized group-level effects
  target += std_normal_lpdf(z_1[1]);
  target += std_normal_lpdf(z_2[1]);
  target += std_normal_lpdf(z_3[1]);
}
generated quantities {
  // actual population-level intercept
  // (undoes the predictor centering so the intercept is on the original scale)
  real b_Intercept = Intercept - dot_product(means_X, b);
}

$rd_rare_logmuphi
// generated with brms 2.20.4
functions {
  
  // Count the strictly non-zero elements of y.
  int num_non_zero_fun(vector y) {
    int A = 0;
    int N = num_elements(y);
    for (n in 1 : N) {
      if (y[n] != 0) {
        A += 1;
      }
    }
    return A;
  }
  
  // Indices of the A non-zero elements of y, in order of appearance.
  // A must equal num_non_zero_fun(y).
  array[] int non_zero_index_fun(vector y, int A) {
    int N = num_elements(y);
    array[A] int non_zero_index;
    int counter = 0;
    for (n in 1 : N) {
      if (y[n] != 0) {
        counter += 1;
        non_zero_index[counter] = n;
      }
    }
    return non_zero_index;
  }
  
  // Indices of the Z zero elements of y, in order of appearance.
  // Z must equal num_elements(y) - num_non_zero_fun(y).
  array[] int zero_index_fun(vector y, int Z) {
    int N = num_elements(y);
    array[Z] int zero_index;
    int counter = 0;
    for (n in 1 : N) {
      if (y[n] == 0) {
        counter += 1;
        zero_index[counter] = n;
      }
    }
    return zero_index;
  }
  
  // Validate scalar Tweedie parameters; rejects (raising an error in the
  // sampler) rather than returning, so callers can assume valid inputs.
  void check_tweedie(real mu, real phi, real mtheta) {
    if (mu < 0) {
      reject("mu must be >= 0; found mu =", mu);
    }
    if (phi < 0) {
      reject("phi must be >= 0; found phi =", phi);
    }
    if (mtheta < 1 || mtheta > 2) {
      reject("mtheta must be in [1, 2]; found mtheta =", mtheta);
    }
  }
  
  // Vectorized overload of check_tweedie for a per-observation mean.
  void check_tweedie(vector mu, real phi, real mtheta) {
    int N = num_elements(mu);
    if (phi < 0) {
      reject("phi must be >= 0; found phi =", phi);
    }
    if (mtheta < 1 || mtheta > 2) {
      reject("mtheta must be in [1, 2]; found mtheta =", mtheta);
    }
    for (n in 1 : N) {
      if (mu[n] < 0) {
        reject("mu must be >= 0; found mu =", mu[n], "on element", n);
      }
    }
  }
  
  // Log density of y under Tweedie(mu, phi, mtheta), evaluated via the
  // compound Poisson-gamma representation: Y is a sum of
  // N ~ Poisson(lambda) independent gamma(alpha, beta) variates.
  // The infinite mixture over the latent count is truncated at M terms.
  real tweedie_lpdf(vector y, vector mu, real phi, real mtheta, int M) {
    check_tweedie(mu, phi, mtheta);
    int N = num_elements(y);
    int N_non_zero = num_non_zero_fun(y);
    int N_zero = N - N_non_zero;
    array[N_zero] int zero_index = zero_index_fun(y, N_zero);
    array[N_non_zero] int non_zero_index = non_zero_index_fun(y, N_non_zero);
    vector[N] lambda = 1 / phi * mu ^ (2 - mtheta) / (2 - mtheta);
    real alpha = (2 - mtheta) / (mtheta - 1);
    vector[N] beta = 1 / phi * mu ^ (1 - mtheta) / (mtheta - 1);
    // A zero observation occurs iff the latent Poisson count is zero.
    real lp = -sum(lambda[zero_index]);
    for (n in 1 : N_non_zero) {
      // BUGFIX: lambda and beta are per-observation vectors and must be
      // indexed by the observation index non_zero_index[n] (matching
      // y[non_zero_index[n]]), not by the loop counter n.
      int obs = non_zero_index[n];
      vector[M] ps;
      for (m in 1 : M) {
        ps[m] = poisson_lpmf(m | lambda[obs])
                + gamma_lpdf(y[obs] | m * alpha, beta[obs]);
      }
      lp += log_sum_exp(ps);
    }
    return lp;
  }
  
  // Draw one Tweedie(mu, phi, mtheta) variate via the compound
  // Poisson-gamma construction.
  real tweedie_rng(real mu, real phi, real mtheta) {
    check_tweedie(mu, phi, mtheta);
    // BUGFIX: handle the boundary cases before computing lambda/alpha/beta.
    // At mtheta == 1 or mtheta == 2 those expressions divide by zero, so the
    // original code errored in poisson_rng(lambda) before ever reaching
    // these branches.
    if (mtheta == 1) {
      // Poisson limit, scaled by phi (mean mu).
      return phi * poisson_rng(mu / phi);
    }
    if (mtheta == 2) {
      // Gamma limit: shape 1/phi, rate 1/(phi * mu), mean mu.
      return gamma_rng(1 / phi, 1 / (phi * mu));
    }
    real lambda = 1 / phi * mu ^ (2 - mtheta) / (2 - mtheta);
    real alpha = (2 - mtheta) / (mtheta - 1);
    real beta = 1 / phi * mu ^ (1 - mtheta) / (mtheta - 1);
    int N = poisson_rng(lambda);
    if (N == 0) {
      // No gamma summands: exact zero (the point mass at 0).
      return 0.;
    }
    return gamma_rng(N * alpha, beta);
  }

}
// Data: response, fixed-effect design matrix, and indexing structures for
// three varying-intercept grouping factors (brms-generated layout).
data {
  int<lower=1> N;  // total number of observations
  vector[N] Y;  // response variable
  int<lower=1> K;  // number of population-level effects
  matrix[N, K] X;  // population-level design matrix
  int<lower=1> Kc;  // number of population-level effects after centering
  // data for group-level effects of ID 1
  int<lower=1> N_1;  // number of grouping levels
  int<lower=1> M_1;  // number of coefficients per level
  array[N] int<lower=1> J_1;  // grouping indicator per observation
  // group-level predictor values
  vector[N] Z_1_1;
  // data for group-level effects of ID 2
  int<lower=1> N_2;  // number of grouping levels
  int<lower=1> M_2;  // number of coefficients per level
  array[N] int<lower=1> J_2;  // grouping indicator per observation
  // group-level predictor values
  vector[N] Z_2_1;
  // data for group-level effects of ID 3
  int<lower=1> N_3;  // number of grouping levels
  int<lower=1> M_3;  // number of coefficients per level
  array[N] int<lower=1> J_3;  // grouping indicator per observation
  // group-level predictor values
  vector[N] Z_3_1;
  int prior_only;  // should the likelihood be ignored?
  int<lower=1> M;  // truncation point of the latent Poisson sum in tweedie_lpdf
}
transformed data {
  matrix[N, Kc] Xc;  // centered version of X without an intercept
  vector[Kc] means_X;  // column means of X before centering
  // Drop column 1 and center the remaining predictors; presumably
  // Kc == K - 1 with column 1 the intercept (brms sets this up).
  for (i in 2:K) {
    means_X[i - 1] = mean(X[, i]);
    Xc[, i - 1] = X[, i] - means_X[i - 1];
  }
}
// Parameters: fixed effects, Tweedie dispersion/power parameters, and
// non-centered group-level effects for three grouping factors.
parameters {
  vector[Kc] b;  // regression coefficients
  real Intercept;  // temporary intercept for centered predictors
  real<lower=0> phi;  // precision parameter
  real<lower=1,upper=2> mtheta;  // Tweedie power parameter (compound Poisson-gamma for 1 < mtheta < 2)
  vector<lower=0>[M_1] sd_1;  // group-level standard deviations
  array[M_1] vector[N_1] z_1;  // standardized group-level effects
  vector<lower=0>[M_2] sd_2;  // group-level standard deviations
  array[M_2] vector[N_2] z_2;  // standardized group-level effects
  vector<lower=0>[M_3] sd_3;  // group-level standard deviations
  array[M_3] vector[N_3] z_3;  // standardized group-level effects
}
transformed parameters {
  // Non-centered parameterization: actual group-level effects are the
  // standardized effects z_* scaled by the group-level SDs sd_*.
  vector[N_1] r_1_1;  // actual group-level effects
  vector[N_2] r_2_1;  // actual group-level effects
  vector[N_3] r_3_1;  // actual group-level effects
  real lprior = 0;  // prior contributions to the log posterior
  r_1_1 = (sd_1[1] * (z_1[1]));
  r_2_1 = (sd_2[1] * (z_2[1]));
  r_3_1 = (sd_3[1] * (z_3[1]));
  // Student-t prior on the (temporary, centered) intercept; vague gamma on phi.
  // Note: mtheta has no explicit prior, so it is uniform over its
  // declared support via the parameter constraint.
  lprior += student_t_lpdf(Intercept | 3, -1.3, 2.5);
  lprior += gamma_lpdf(phi | 0.01, 0.01);
  // Half-student-t priors on the SDs; the lccdf term renormalizes
  // for the truncation at zero.
  lprior += student_t_lpdf(sd_1 | 3, 0, 2.5)
    - 1 * student_t_lccdf(0 | 3, 0, 2.5);
  lprior += student_t_lpdf(sd_2 | 3, 0, 2.5)
    - 1 * student_t_lccdf(0 | 3, 0, 2.5);
  lprior += student_t_lpdf(sd_3 | 3, 0, 2.5)
    - 1 * student_t_lccdf(0 | 3, 0, 2.5);
}
model {
  // likelihood including constants
  if (!prior_only) {
    // initialize linear predictor term
    vector[N] mu = rep_vector(0.0, N);
    mu += Intercept + Xc * b;
    for (n in 1:N) {
      // add more terms to the linear predictor
      mu[n] += r_1_1[J_1[n]] * Z_1_1[n] + r_2_1[J_2[n]] * Z_2_1[n] + r_3_1[J_3[n]] * Z_3_1[n];
    }
    // log link on mu: map the linear predictor onto the positive mean scale
    mu = exp(mu);
    target += tweedie_lpdf(Y | mu, phi, mtheta, M);
  }
  // priors including constants
  target += lprior;
  // standard-normal priors on the standardized group-level effects
  target += std_normal_lpdf(z_1[1]);
  target += std_normal_lpdf(z_2[1]);
  target += std_normal_lpdf(z_3[1]);
}
generated quantities {
  // actual population-level intercept
  // (undoes the predictor centering so the intercept is on the original scale)
  real b_Intercept = Intercept - dot_product(means_X, b);
}